[ "dropped"
, case afile of
AssociatedFile Nothing -> serializeKey key
- AssociatedFile (Just af) -> fromRawFilePath af
+ AssociatedFile (Just af) -> fromOsPath af
, "(from " ++ maybe "here" show u ++ ")"
, "(copies now " ++ show (have - 1) ++ ")"
, ": " ++ reason
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified System.FilePath.Posix.ByteString as Posix
-import qualified System.FilePath.ByteString as P
import qualified Data.ByteArray.Encoding as BA
{- Configures how to build an import tree. -}
let subtreeref = Ref $
fromRef' finaltree
<> ":"
- <> getTopFilePath dir
+ <> fromOsPath (getTopFilePath dir)
in fromMaybe emptyTree
<$> inRepo (Git.Ref.tree subtreeref)
updateexportdb importedtree
lf = fromImportLocation loc
treepath = asTopFilePath lf
topf = asTopFilePath $
- maybe lf (\sd -> getTopFilePath sd P.</> lf) msubdir
+ maybe lf (\sd -> getTopFilePath sd </> lf) msubdir
mklink k = do
relf <- fromRepo $ fromTopFilePath topf
symlink <- calcRepo $ gitAnnexLink relf k
- linksha <- hashSymlink symlink
+ linksha <- hashSymlink (fromOsPath symlink)
return $ TreeItem treepath (fromTreeItemType TreeSymlink) linksha
mkpointer k = TreeItem treepath (fromTreeItemType TreeFile)
<$> hashPointerFile k
-- Full directory prefix where the sub tree is located.
let fullprefix = asTopFilePath $ case msubdir of
Nothing -> subdir
- Just d -> getTopFilePath d Posix.</> subdir
+ Just d -> toOsPath $
+ fromOsPath (getTopFilePath d) Posix.</> fromOsPath subdir
Tree ts <- converttree (Just fullprefix) $
map (\(p, i) -> (mkImportLocation p, i))
(importableContentsSubTree c)
let af = AssociatedFile (Just f)
let downloader p' tmpfile = do
_ <- Remote.retrieveExportWithContentIdentifier
- ia loc [cid] (fromRawFilePath tmpfile)
+ ia loc [cid] tmpfile
(Left k)
(combineMeterUpdate p' p)
ok <- moveAnnex k af tmpfile
doimportsmall cidmap loc cid sz p = do
let downloader tmpfile = do
(k, _) <- Remote.retrieveExportWithContentIdentifier
- ia loc [cid] (fromRawFilePath tmpfile)
+ ia loc [cid] tmpfile
(Right (mkkey tmpfile))
p
case keyGitSha k of
let af = AssociatedFile (Just f)
let downloader tmpfile p = do
(k, _) <- Remote.retrieveExportWithContentIdentifier
- ia loc [cid] (fromRawFilePath tmpfile)
+ ia loc [cid] tmpfile
(Right (mkkey tmpfile))
p
case keyGitSha k of
case importtreeconfig of
ImportTree -> fromImportLocation loc
ImportSubTree subdir _ ->
- getTopFilePath subdir P.</> fromImportLocation loc
+ getTopFilePath subdir </> fromImportLocation loc
getcidkey cidmap db cid = liftIO $
-- Avoiding querying the database when it's empty speeds up
isknown <||> (matches <&&> notignored)
where
-- Checks, from least to most expensive.
- ingitdir = ".git" `elem` Posix.splitDirectories (fromImportLocation loc)
+ ingitdir = ".git" `elem` Posix.splitDirectories (fromOsPath (fromImportLocation loc))
matches = matchesImportLocation matcher loc sz
isknown = isKnownImportLocation dbhandle loc
notignored = notIgnoredImportLocation importtreeconfig ci loc
where
f = case importtreeconfig of
ImportSubTree dir _ ->
- getTopFilePath dir P.</> fromImportLocation loc
+ getTopFilePath dir </> fromImportLocation loc
ImportTree ->
fromImportLocation loc
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import qualified Data.Map.Strict as M
-import qualified System.FilePath.ByteString as P
import qualified Data.Set as S
run :: [String] -> IO ()
else downloadManifestOrFail rmt
l <- forM (inManifest manifest) $ \k -> do
b <- downloadGitBundle rmt k
- heads <- inRepo $ Git.Bundle.listHeads b
+ let b' = fromOsPath b
+ heads <- inRepo $ Git.Bundle.listHeads b'
-- Get all the objects from the bundle. This is done here
-- so that the tracking refs can be updated with what is
-- listed, and so that when a full repush is done, all
-- objects are available to be pushed.
when forpush $
- inRepo $ Git.Bundle.unbundle b
+ inRepo $ Git.Bundle.unbundle b'
-- The bundle may contain tracking refs, or regular refs,
-- make sure we're operating on regular refs.
return $ map (\(s, r) -> (fromTrackingRef rmt r, s)) heads
fetch' st rmt = do
manifest <- maybe (downloadManifestOrFail rmt) pure (manifestCache st)
forM_ (inManifest manifest) $ \k ->
- downloadGitBundle rmt k >>= inRepo . Git.Bundle.unbundle
+ downloadGitBundle rmt k
+ >>= inRepo . Git.Bundle.unbundle . fromOsPath
-- Newline indicates end of fetch.
liftIO $ do
putStrLn ""
resolveSpecialRemoteWebUrl url
| "http://" `isPrefixOf` lcurl || "https://" `isPrefixOf` lcurl =
Url.withUrlOptionsPromptingCreds $ \uo ->
- withTmpFile (toOsPath "git-remote-annex") $ \tmp h -> do
+ withTmpFile (literalOsPath "git-remote-annex") $ \tmp h -> do
liftIO $ hClose h
- let tmp' = fromRawFilePath $ fromOsPath tmp
- Url.download' nullMeterUpdate Nothing url tmp' uo >>= \case
+ Url.download' nullMeterUpdate Nothing url tmp uo >>= \case
Left err -> giveup $ url ++ " " ++ err
Right () -> liftIO $
fmap decodeBS
-- it needs to re-download it fresh every time, and the object
-- file should not be stored locally.
gettotmp dl = withOtherTmp $ \othertmp ->
- withTmpFileIn (toOsPath othertmp) (toOsPath "GITMANIFEST") $ \tmp tmph -> do
+ withTmpFileIn othertmp (literalOsPath "GITMANIFEST") $ \tmp tmph -> do
liftIO $ hClose tmph
- _ <- dl (fromRawFilePath (fromOsPath tmp))
+ _ <- dl tmp
b <- liftIO (F.readFile' tmp)
case parseManifest b of
Right m -> Just <$> verifyManifest rmt m
dropKey' rmt mk
put mk
- put mk = withTmpFile (toOsPath "GITMANIFEST") $ \tmp tmph -> do
+ put mk = withTmpFile (literalOsPath "GITMANIFEST") $ \tmp tmph -> do
liftIO $ B8.hPut tmph (formatManifest manifest)
liftIO $ hClose tmph
-- Uploading needs the key to be in the annex objects
-- keys, which it is not.
objfile <- calcRepo (gitAnnexLocation mk)
modifyContentDir objfile $
- linkOrCopy mk (fromOsPath tmp) objfile Nothing >>= \case
+ linkOrCopy mk tmp objfile Nothing >>= \case
-- Important to set the right perms even
-- though the object is only present
-- briefly, since sending objects may rely
-- on or even copy file perms.
Just _ -> do
- liftIO $ R.setFileMode objfile
+ liftIO $ R.setFileMode (fromOsPath objfile)
=<< defaultFileMode
freezeContent objfile
Nothing -> uploadfailed
- interrupted before updating the manifest on the remote, or when a race
 - causes the uploaded manifest to be overwritten.
-}
-lastPushedManifestFile :: UUID -> Git.Repo -> RawFilePath
-lastPushedManifestFile u r = gitAnnexDir r P.</> "git-remote-annex"
- P.</> fromUUID u P.</> "manifest"
+lastPushedManifestFile :: UUID -> Git.Repo -> OsPath
+lastPushedManifestFile u r = gitAnnexDir r
+ </> literalOsPath "git-remote-annex"
+ </> fromUUID u
+ </> literalOsPath "manifest"
{- Call before uploading anything. The returned manifest has added
- to it any bundle keys that were in the lastPushedManifestFile
f <- fromRepo (lastPushedManifestFile (Remote.uuid rmt))
oldmanifest <- liftIO $
fromRight mempty . parseManifest
- <$> F.readFile' (toOsPath f)
+ <$> F.readFile' f
`catchNonAsync` (const (pure mempty))
let oldmanifest' = mkManifest [] $
S.fromList (inManifest oldmanifest)
-- and so more things pulled from it, etc.
-- 3. Git bundle objects are not usually transferred between repositories
-- except special remotes (although the user can if they want to).
-downloadGitBundle :: Remote -> Key -> Annex FilePath
+downloadGitBundle :: Remote -> Key -> Annex OsPath
downloadGitBundle rmt k = getKeyExportLocations rmt k >>= \case
Nothing -> dlwith $
download rmt k (AssociatedFile Nothing) stdRetry noNotification
anyM getexport locs
where
dlwith a = ifM a
- ( decodeBS <$> calcRepo (gitAnnexLocation k)
+ ( calcRepo (gitAnnexLocation k)
, giveup $ "Failed to download " ++ serializeKey k
)
getexport' loc =
getViaTmp rsp vc k (AssociatedFile Nothing) Nothing $ \tmp -> do
v <- Remote.retrieveExport (Remote.exportActions rmt)
- k loc (decodeBS tmp) nullMeterUpdate
+ k loc tmp nullMeterUpdate
return (True, v)
rsp = Remote.retrievalSecurityPolicy rmt
vc = Remote.RemoteVerify rmt
uploadGitObject :: Remote -> Key -> Annex ()
uploadGitObject rmt k = getKeyExportLocations rmt k >>= \case
Just (loc:_) -> do
- objfile <- fromRawFilePath <$> calcRepo (gitAnnexLocation k)
+ objfile <- calcRepo (gitAnnexLocation k)
Remote.storeExport (Remote.exportActions rmt) objfile k loc nullMeterUpdate
_ ->
unlessM (upload rmt k (AssociatedFile Nothing) retry noNotification) $
-> Manifest
-> Annex (Key, Annex ())
generateGitBundle rmt bs manifest =
- withTmpFile (toOsPath "GITBUNDLE") $ \tmp tmph -> do
- let tmp' = fromOsPath tmp
+ withTmpFile (literalOsPath "GITBUNDLE") $ \tmp tmph -> do
liftIO $ hClose tmph
- inRepo $ Git.Bundle.create (fromRawFilePath tmp') bs
+ inRepo $ Git.Bundle.create (fromOsPath tmp) bs
bundlekey <- genGitBundleKey (Remote.uuid rmt)
- tmp' nullMeterUpdate
+ tmp nullMeterUpdate
if (bundlekey `notElem` inManifest manifest)
then do
- unlessM (moveAnnex bundlekey (AssociatedFile Nothing) tmp') $
+ unlessM (moveAnnex bundlekey (AssociatedFile Nothing) tmp) $
giveup "Unable to push"
return (bundlekey, uploadaction bundlekey)
else return (bundlekey, noop)
keyExportLocations :: Remote -> Key -> GitConfig -> UUID -> Maybe [ExportLocation]
keyExportLocations rmt k cfg uuid
| exportTree (Remote.config rmt) || importTree (Remote.config rmt) =
- Just $ map (\p -> mkExportLocation (".git" P.</> p)) $
+ Just $ map (\p -> mkExportLocation (literalOsPath ".git" </> p)) $
concatMap (`annexLocationsBare` k) cfgs
| otherwise = Nothing
where
Nothing -> fixup <$> Git.CurrentRepo.get
where
fixup r@(Repo { location = loc@(Local { worktree = Just _ }) }) =
- r { location = loc { worktree = Just (P.takeDirectory (gitdir loc)) } }
+ r { location = loc { worktree = Just (takeDirectory (gitdir loc)) } }
fixup r = r
-- Records what the git-annex branch was at the beginning of this command.
-- journal writes to a temporary directory, so that all writes
-- to the git-annex branch by the action will be discarded.
specialRemoteFromUrl :: StartAnnexBranch -> Annex a -> Annex a
-specialRemoteFromUrl sab a = withTmpDir (toOsPath "journal") $ \tmpdir -> do
+specialRemoteFromUrl sab a = withTmpDir (literalOsPath "journal") $ \tmpdir -> do
Annex.overrideGitConfig $ \c ->
c { annexAlwaysCommit = False }
Annex.BranchState.changeState $ \st ->
- st { alternateJournal = Just (toRawFilePath tmpdir) }
+ st { alternateJournal = Just tmpdir }
a `finally` cleanupInitialization sab tmpdir
-- If the git-annex branch did not exist when this command started,
-- involve checking out an adjusted branch. But git clone wants to do its
-- own checkout. So no initialization is done then, and the git bundle
-- objects are deleted.
-cleanupInitialization :: StartAnnexBranch -> FilePath -> Annex ()
+cleanupInitialization :: StartAnnexBranch -> OsPath -> Annex ()
cleanupInitialization sab alternatejournaldir = void $ tryNonAsync $ do
- liftIO $ mapM_ R.removeLink
- =<< dirContents (toRawFilePath alternatejournaldir)
+ liftIO $ mapM_ removeFile =<< dirContents alternatejournaldir
case sab of
AnnexBranchExistedAlready _ -> noop
AnnexBranchCreatedEmpty r ->
whenM ((r ==) <$> Annex.Branch.getBranch) $ do
indexfile <- fromRepo gitAnnexIndex
- liftIO $ removeWhenExistsWith R.removeLink indexfile
+ liftIO $ removeWhenExistsWith removeFile indexfile
-- When cloning failed and this is being
-- run as an exception is thrown, HEAD will
-- not be set to a valid value, which will
forM_ ks $ \k -> case fromKey keyVariety k of
GitBundleKey -> lockContentForRemoval k noop removeAnnex
_ -> noop
- void $ liftIO $ tryIO $ removeDirectory (decodeBS annexobjectdir)
+ void $ liftIO $ tryIO $ removeDirectory annexobjectdir
notcrippledfilesystem = not <$> probeCrippledFileSystem
Command.Sync.prepMerge
Command.Add.seek Command.Add.AddOptions
- { Command.Add.addThese = Command.Sync.contentOfOption o
+ { Command.Add.addThese = map fromOsPath $
+ Command.Sync.contentOfOption o
, Command.Add.batchOption = NoBatch
, Command.Add.updateOnly = False
, Command.Add.largeFilesOverride = Nothing
seek o@(LocalImportOptions {}) = startConcurrency commandStages $ do
repopath <- liftIO . absPath =<< fromRepo Git.repoPath
inrepops <- liftIO $ filter (dirContains repopath)
- <$> mapM (absPath . toRawFilePath) (importFiles o)
+ <$> mapM (absPath . toOsPath) (importFiles o)
unless (null inrepops) $ do
qp <- coreQuotePath <$> Annex.getGitConfig
giveup $ decodeBS $ quote qp $
giveup "That remote does not support imports."
subdir <- maybe
(pure Nothing)
- (Just <$$> inRepo . toTopFilePath . toRawFilePath)
+ (Just <$$> inRepo . toTopFilePath . toOsPath)
(importToSubDir o)
addunlockedmatcher <- addUnlockedMatcher
seekRemote r (importToBranch o) subdir (importContent o)
addunlockedmatcher
(messageOption o)
-startLocal :: ImportOptions -> AddUnlockedMatcher -> GetFileMatcher -> DuplicateMode -> (RawFilePath, RawFilePath) -> CommandStart
+startLocal :: ImportOptions -> AddUnlockedMatcher -> GetFileMatcher -> DuplicateMode -> (OsPath, OsPath) -> CommandStart
startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
- ifM (liftIO $ isRegularFile <$> R.getSymbolicLinkStatus srcfile)
+ ifM (liftIO $ isRegularFile <$> R.getSymbolicLinkStatus (fromOsPath srcfile))
( starting "import" ai si pickaction
, stop
)
showNote $ UnquotedString $ "duplicate of " ++ serializeKey k
verifyExisting k destfile
( do
- liftIO $ R.removeLink srcfile
+ liftIO $ removeFile srcfile
next $ return True
, do
warning "Could not verify that the content is still present in the annex; not removing from the import location."
warning $ "not importing " <> QuotedPath destfile <> " which is .gitignored (use --no-check-gitignore to override)"
stop
else do
- existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus destfile)
+ existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus (fromOsPath destfile))
case existing of
Nothing -> importfilechecked ld k
Just s
| isDirectory s -> notoverwriting "(is a directory)"
| isSymbolicLink s -> ifM (Annex.getRead Annex.force)
( do
- liftIO $ removeWhenExistsWith R.removeLink destfile
+ liftIO $ removeWhenExistsWith removeFile destfile
importfilechecked ld k
, notoverwriting "(is a symlink)"
)
| otherwise -> ifM (Annex.getRead Annex.force)
( do
- liftIO $ removeWhenExistsWith R.removeLink destfile
+ liftIO $ removeWhenExistsWith removeFile destfile
importfilechecked ld k
, notoverwriting "(use --force to override, or a duplication option such as --deduplicate to clean up)"
)
checkdestdir cont = do
let destdir = parentDir destfile
- existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus destdir)
+ existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus (fromOsPath destdir))
case existing of
Nothing -> cont
Just s
createWorkTreeDirectory (parentDir destfile)
unwind <- liftIO $ if mode == Duplicate || mode == SkipDuplicates
then do
- void $ copyFileExternal CopyAllMetaData
- (fromRawFilePath srcfile)
- (fromRawFilePath destfile)
- return $ removeWhenExistsWith R.removeLink destfile
+ void $ copyFileExternal CopyAllMetaData srcfile destfile
+ return $ removeWhenExistsWith removeFile destfile
else do
moveFile srcfile destfile
return $ moveFile destfile srcfile
-- weakly the same as the originally locked down file's
-- inode cache. (Since the file may have been copied,
-- its inodes may not be the same.)
- s <- liftIO $ R.getSymbolicLinkStatus destfile
+ s <- liftIO $ R.getSymbolicLinkStatus (fromOsPath destfile)
newcache <- withTSDelta $ \d -> liftIO $ toInodeCache d destfile s
let unchanged = case (newcache, inodeCache (keySource ld)) of
(_, Nothing) -> True
-- the file gets copied into the repository.
, checkWritePerms = False
}
- v <- lockDown cfg (fromRawFilePath srcfile)
+ v <- lockDown cfg srcfile
case v of
Just ld -> do
backend <- chooseBackend destfile
showNote (s <> "; skipping")
next (return True)
-verifyExisting :: Key -> RawFilePath -> (CommandPerform, CommandPerform) -> CommandPerform
+verifyExisting :: Key -> OsPath -> (CommandPerform, CommandPerform) -> CommandPerform
verifyExisting key destfile (yes, no) = do
-- Look up the numcopies setting for the file that it would be
-- imported to, if it were imported.
module Command.PostReceive where
+import Common
import Command
import qualified Annex
import Annex.UpdateInstead
fixPostReceiveHookEnv = do
g <- Annex.gitRepo
case location g of
- Local { gitdir = ".", worktree = Just "." } ->
+ l@(Local {}) | gitdir l == literalOsPath "." && worktree l == Just (literalOsPath ".") ->
Annex.adjustGitRepo $ \g' -> pure $ g'
{ location = case location g' of
loc@(Local {}) -> loc
- { worktree = Just ".." }
+ { worktree = Just (literalOsPath "..") }
loc -> loc
}
_ -> noop
-
, pushOption :: Bool
, contentOption :: Maybe Bool
, noContentOption :: Maybe Bool
- , contentOfOption :: [FilePath]
+ , contentOfOption :: [OsPath]
, cleanupOption :: Bool
, keyOptions :: Maybe KeyOptions
, resolveMergeOverride :: Bool
<> short 'g'
<> help "do not transfer annexed file contents"
)))
- <*> many (strOption
+ <*> many (stringToOsPath <$> strOption
( long "content-of"
<> short 'C'
<> help "transfer contents of annexed files in a given location"
<*> pure (pushOption v)
<*> pure (contentOption v)
<*> pure (noContentOption v)
- <*> liftIO (mapM (fromRawFilePath <$$> absPath . toRawFilePath) (contentOfOption v))
+ <*> liftIO (mapM absPath (contentOfOption v))
<*> pure (cleanupOption v)
<*> pure (keyOptions v)
<*> pure (resolveMergeOverride v)
- of the repo. This also means that sync always acts on all files in the
- repository, not just on a subdirectory. -}
prepMerge :: Annex ()
-prepMerge = Annex.changeDirectory . fromRawFilePath =<< fromRepo Git.repoPath
+prepMerge = Annex.changeDirectory =<< fromRepo Git.repoPath
mergeConfig :: Bool -> Annex [Git.Merge.MergeConfig]
mergeConfig mergeunrelated = do
Nothing -> return True
Just wt -> ifM needemulation
( gitAnnexChildProcess "post-receive" []
- (\cp -> cp { cwd = Just (fromRawFilePath wt) })
+ (\cp -> cp { cwd = Just (fromOsPath wt) })
(\_ _ _ pid -> waitForProcess pid >>= return . \case
ExitSuccess -> True
_ -> False
)
_ -> case currbranch of
(Just origbranch, Just adj) | adjustmentHidesFiles adj -> do
- l <- workTreeItems' (AllowHidden True) ww (contentOfOption o)
+ l <- workTreeItems' (AllowHidden True) ww
+ (map fromOsPath (contentOfOption o))
seekincludinghidden origbranch mvar l (const noop)
pure Nothing
_ -> do
- l <- workTreeItems ww (contentOfOption o)
+ l <- workTreeItems ww
+ (map fromOsPath (contentOfOption o))
seekworktree mvar l (const noop)
pure Nothing
waitForAllRunningCommandActions
mtree <- inRepo $ Git.Ref.tree b
let addsubdir = case snd (splitRemoteAnnexTrackingBranchSubdir b) of
Just subdir -> \cb -> Git.Ref $
- Git.fromRef' cb <> ":" <> getTopFilePath subdir
+ Git.fromRef' cb <> ":" <> fromOsPath (getTopFilePath subdir)
Nothing -> id
mcurrtree <- maybe (pure Nothing)
(inRepo . Git.Ref.tree . addsubdir)
-- importable keys, so avoids needing to buffer all
-- the rest of the files in memory.
in case ThirdPartyPopulated.importKey' loc reqsz of
- Just k -> (fromOsPath loc, (borgContentIdentifier, retsz k))
+ Just k -> (loc, (borgContentIdentifier, retsz k))
: parsefilelist archivename rest
Nothing -> parsefilelist archivename rest
parsefilelist _ _ = []
-- last imported tree. And the contents of those archives can be retrieved
-- by listing the subtree recursively, which will likely be quite a lot
-- faster than running borg.
-getImported :: UUID -> Annex (M.Map BorgArchiveName (Annex [(RawFilePath, (ContentIdentifier, ByteSize))]))
+getImported :: UUID -> Annex (M.Map BorgArchiveName (Annex [(OsPath, (ContentIdentifier, ByteSize))]))
getImported u = M.unions <$> (mapM go . exportedTreeishes =<< getExport u)
where
go t = M.fromList . mapMaybe mk
mkImportLocation $ getTopFilePath $ LsTree.file ti
k <- fileKey (takeFileName f)
return
- ( fromOsPath (genImportLocation f)
+ ( genImportLocation f
,
( borgContentIdentifier
-- defaulting to 0 size is ok, this size
- of the main tree. Nested subtrees are not allowed. -}
data ImportableContentsChunk m info = ImportableContentsChunk
{ importableContentsSubDir :: ImportChunkSubDir
- , importableContentsSubTree :: [(RawFilePath, info)]
+ , importableContentsSubTree :: [(OsPath, info)]
-- ^ locations are relative to importableContentsSubDir
, importableContentsNextChunk :: m (Maybe (ImportableContentsChunk m info))
-- ^ Continuation to get the next chunk.